In [1]:
import numpy
import scipy.stats
from scipy.integrate import quad

In [2]:
from timeit import default_timer as timer

In [3]:
LOW_BOUND = -10.
UPP_BOUND = 10.

In [4]:
N = 30
M = 1000000

# Gaussian expectation vector
Mu = numpy.zeros(N)

# Gaussian variance-covariance matrix
Sigma = numpy.array([[ 4.09, -0.89, -1.12, -0.03,  0.21, -0.88,  0.15,  0.04, -1.01,
         0.88,  0.98,  0.26, -0.31,  0.94,  0.28, -0.05, -0.89, -1.09,
         0.14, -1.83, -0.1 , -0.76,  0.36,  0.24, -0.04, -0.18,  0.94,
         0.95,  1.01,  0.45],
       [-0.89,  3.17, -0.78, -0.32,  0.6 ,  0.34,  0.1 , -0.08,  0.6 ,
        -0.1 ,  0.59,  0.24, -0.22,  0.55, -0.29,  1.04,  0.03,  0.34,
         0.55,  0.9 , -0.39, -0.64, -1.  , -0.02, -0.64, -0.01, -0.11,
        -0.89,  1.11, -0.67],
       [-1.12, -0.78,  4.73,  0.01,  0.2 ,  1.36, -0.19, -0.05, -0.2 ,
        -1.22, -0.82, -0.01,  0.08, -0.55,  0.6 ,  0.14, -0.51, -1.24,
         1.05,  1.28,  0.95, -0.23,  2.24, -0.61,  0.64, -0.  , -0.52,
        -1.92, -1.38,  0.45],
       [-0.03, -0.32,  0.01,  3.83,  0.02, -1.1 , -0.22,  0.07, -0.59,
        -0.2 , -0.77,  0.1 ,  0.26, -0.25,  0.34,  1.38, -0.62, -0.29,
         0.33, -1.24,  0.42,  1.08,  1.14,  0.22, -1.03, -0.23,  0.2 ,
         1.36, -0.23, -0.91],
       [ 0.21,  0.6 ,  0.2 ,  0.02,  1.15, -0.1 ,  0.05,  0.05, -0.08,
        -0.6 , -0.09,  0.22, -0.07,  0.15, -0.06, -0.07, -0.46, -0.18,
         0.29,  0.12,  0.21,  0.1 , -0.21, -0.26, -0.05,  0.08, -0.26,
        -0.99, -0.23,  0.04],
       [-0.88,  0.34,  1.36, -1.1 , -0.1 ,  5.78,  0.2 , -0.04, -0.64,
        -0.31,  1.06,  0.32, -0.16, -0.71,  0.96,  0.88, -1.24, -0.18,
        -1.17, -1.07, -0.82, -0.51,  1.69, -1.13,  0.82,  0.49, -0.43,
        -0.5 , -0.04, -1.21],
       [ 0.15,  0.1 , -0.19, -0.22,  0.05,  0.2 ,  0.2 ,  0.02,  0.03,
        -0.  ,  0.02,  0.05, -0.03,  0.14,  0.01, -0.03, -0.06, -0.04,
        -0.12, -0.17, -0.04, -0.14, -0.01, -0.08,  0.07,  0.06, -0.01,
        -0.03,  0.14,  0.09],
       [ 0.04, -0.08, -0.05,  0.07,  0.05, -0.04,  0.02,  0.17,  0.04,
         0.04, -0.  , -0.05, -0.  , -0.01, -0.01, -0.14,  0.04, -0.08,
        -0.1 , -0.04,  0.03,  0.01,  0.06, -0.06,  0.01,  0.1 ,  0.01,
         0.12, -0.03,  0.05],
       [-1.01,  0.6 , -0.2 , -0.59, -0.08, -0.64,  0.03,  0.04,  2.68,
         0.36, -0.12, -0.44, -0.24, -0.57, -0.41, -0.69, -0.33,  0.46,
        -0.38,  0.72,  0.15, -0.38, -1.56,  0.09,  0.53, -0.17,  0.52,
         0.31, -0.77,  0.71],
       [ 0.88, -0.1 , -1.22, -0.2 , -0.6 , -0.31, -0.  ,  0.04,  0.36,
         3.09,  1.42, -0.15, -0.14,  0.29,  0.39, -0.54,  0.92,  0.22,
        -0.41,  0.32, -0.61, -0.3 , -0.14,  0.72, -0.13, -0.57,  0.01,
         2.19,  1.69, -0.38],
       [ 0.98,  0.59, -0.82, -0.77, -0.09,  1.06,  0.02, -0.  , -0.12,
         1.42,  2.2 ,  0.27, -0.42, -0.12,  0.09, -0.41,  0.15, -0.31,
        -0.3 ,  0.09, -0.71, -0.43,  0.33,  0.3 , -0.07, -0.14,  0.23,
         1.4 ,  1.33, -0.  ],
       [ 0.26,  0.24, -0.01,  0.1 ,  0.22,  0.32,  0.05, -0.05, -0.44,
        -0.15,  0.27,  0.55,  0.01,  0.29,  0.1 ,  0.12,  0.01, -0.21,
        -0.09, -0.13,  0.1 ,  0.12,  0.46, -0.16, -0.03, -0.11, -0.11,
        -0.11,  0.49,  0.08],
       [-0.31, -0.22,  0.08,  0.26, -0.07, -0.16, -0.03, -0.  , -0.24,
        -0.14, -0.42,  0.01,  0.55,  0.02,  0.18,  0.18,  0.25,  0.09,
        -0.16,  0.1 ,  0.06,  0.1 , -0.01, -0.03, -0.07, -0.11, -0.2 ,
        -0.14,  0.07, -0.02],
       [ 0.94,  0.55, -0.55, -0.25,  0.15, -0.71,  0.14, -0.01, -0.57,
         0.29, -0.12,  0.29,  0.02,  1.97,  0.1 ,  0.23,  0.81, -0.21,
        -0.1 , -0.1 ,  0.5 , -0.25,  0.02, -0.33,  0.23, -0.04,  0.27,
        -0.7 ,  1.22, -0.26],
       [ 0.28, -0.29,  0.6 ,  0.34, -0.06,  0.96,  0.01, -0.01, -0.41,
         0.39,  0.09,  0.1 ,  0.18,  0.1 ,  0.96,  0.42, -0.15, -0.69,
        -0.19, -0.62, -0.12, -0.22,  0.61, -0.03,  0.13, -0.02,  0.04,
         0.69, -0.02, -0.55],
       [-0.05,  1.04,  0.14,  1.38, -0.07,  0.88, -0.03, -0.14, -0.69,
        -0.54, -0.41,  0.12,  0.18,  0.23,  0.42,  2.59, -1.24, -0.2 ,
        -0.09, -0.53, -0.52, -0.14,  0.6 ,  0.03, -0.73, -0.14,  0.08,
        -0.74,  0.88, -1.  ],
       [-0.89,  0.03, -0.51, -0.62, -0.46, -1.24, -0.06,  0.04, -0.33,
         0.92,  0.15,  0.01,  0.25,  0.81, -0.15, -1.24,  2.97,  0.23,
         0.28,  1.14,  0.26,  0.27, -0.62,  0.1 ,  0.06, -0.01, -0.42,
         0.95,  0.27, -0.37],
       [-1.09,  0.34, -1.24, -0.29, -0.18, -0.18, -0.04, -0.08,  0.46,
         0.22, -0.31, -0.21,  0.09, -0.21, -0.69, -0.2 ,  0.23,  2.77,
         0.5 ,  0.19,  0.15,  0.6 , -1.25,  0.01, -0.1 , -0.12, -0.5 ,
        -1.12, -0.11, -0.67],
       [ 0.14,  0.55,  1.05,  0.33,  0.29, -1.17, -0.12, -0.1 , -0.38,
        -0.41, -0.3 , -0.09, -0.16, -0.1 , -0.19, -0.09,  0.28,  0.5 ,
         3.37, -0.11,  0.71, -0.4 , -0.32,  0.4 , -0.67, -0.01, -0.24,
        -0.48, -1.39, -0.66],
       [-1.83,  0.9 ,  1.28, -1.24,  0.12, -1.07, -0.17, -0.04,  0.72,
         0.32,  0.09, -0.13,  0.1 , -0.1 , -0.62, -0.53,  1.14,  0.19,
        -0.11,  4.13, -0.05,  0.13,  0.23,  0.44, -0.2 , -0.31, -0.97,
        -1.92,  1.56,  1.45],
       [-0.1 , -0.39,  0.95,  0.42,  0.21, -0.82, -0.04,  0.03,  0.15,
        -0.61, -0.71,  0.1 ,  0.06,  0.5 , -0.12, -0.52,  0.26,  0.15,
         0.71, -0.05,  1.37,  0.26,  0.45, -0.39,  0.42,  0.01,  0.18,
        -0.77, -0.7 ,  0.33],
       [-0.76, -0.64, -0.23,  1.08,  0.1 , -0.51, -0.14,  0.01, -0.38,
        -0.3 , -0.43,  0.12,  0.1 , -0.25, -0.22, -0.14,  0.27,  0.6 ,
        -0.4 ,  0.13,  0.26,  1.49,  0.41,  0.11, -0.12, -0.03, -0.33,
         0.03, -0.18, -0.08],
       [ 0.36, -1.  ,  2.24,  1.14, -0.21,  1.69, -0.01,  0.06, -1.56,
        -0.14,  0.33,  0.46, -0.01,  0.02,  0.61,  0.6 , -0.62, -1.25,
        -0.32,  0.23,  0.45,  0.41,  4.4 , -0.03, -0.13,  0.11, -0.31,
        -0.29,  1.69,  0.95],
       [ 0.24, -0.02, -0.61,  0.22, -0.26, -1.13, -0.08, -0.06,  0.09,
         0.72,  0.3 , -0.16, -0.03, -0.33, -0.03,  0.03,  0.1 ,  0.01,
         0.4 ,  0.44, -0.39,  0.11, -0.03,  1.36, -0.56, -0.23,  0.14,
         1.07,  0.65,  0.51],
       [-0.04, -0.64,  0.64, -1.03, -0.05,  0.82,  0.07,  0.01,  0.53,
        -0.13, -0.07, -0.03, -0.07,  0.23,  0.13, -0.73,  0.06, -0.1 ,
        -0.67, -0.2 ,  0.42, -0.12, -0.13, -0.56,  1.27,  0.05,  0.34,
        -0.34, -0.73,  0.23],
       [-0.18, -0.01, -0.  , -0.23,  0.08,  0.49,  0.06,  0.1 , -0.17,
        -0.57, -0.14, -0.11, -0.11, -0.04, -0.02, -0.14, -0.01, -0.12,
        -0.01, -0.31,  0.01, -0.03,  0.11, -0.23,  0.05,  0.69,  0.06,
        -0.16, -0.48, -0.09],
       [ 0.94, -0.11, -0.52,  0.2 , -0.26, -0.43, -0.01,  0.01,  0.52,
         0.01,  0.23, -0.11, -0.2 ,  0.27,  0.04,  0.08, -0.42, -0.5 ,
        -0.24, -0.97,  0.18, -0.33, -0.31,  0.14,  0.34,  0.06,  1.29,
         0.96, -0.28,  0.17],
       [ 0.95, -0.89, -1.92,  1.36, -0.99, -0.5 , -0.03,  0.12,  0.31,
         2.19,  1.4 , -0.11, -0.14, -0.7 ,  0.69, -0.74,  0.95, -1.12,
        -0.48, -1.92, -0.77,  0.03, -0.29,  1.07, -0.34, -0.16,  0.96,
         5.81, -0.35, -0.59],
       [ 1.01,  1.11, -1.38, -0.23, -0.23, -0.04,  0.14, -0.03, -0.77,
         1.69,  1.33,  0.49,  0.07,  1.22, -0.02,  0.88,  0.27, -0.11,
        -1.39,  1.56, -0.7 , -0.18,  1.69,  0.65, -0.73, -0.48, -0.28,
        -0.35,  4.81,  1.05],
       [ 0.45, -0.67,  0.45, -0.91,  0.04, -1.21,  0.09,  0.05,  0.71,
        -0.38, -0.  ,  0.08, -0.02, -0.26, -0.55, -1.  , -0.37, -0.67,
        -0.66,  1.45,  0.33, -0.08,  0.95,  0.51,  0.23, -0.09,  0.17,
        -0.59,  1.05,  3.02]])
print "Mu = %s" % Mu
print "Sigma = \n %s" % Sigma

ts_rv = timer()
rv = scipy.stats.multivariate_normal(mean=Mu, cov=Sigma, allow_singular=True)
X = rv.rvs(size=M)
te_rv = timer()

ctp = te_rv - ts_rv


Mu = [ 0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.
  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.]
Sigma = 
 [[ 4.09 -0.89 -1.12 -0.03  0.21 -0.88  0.15  0.04 -1.01  0.88  0.98  0.26
  -0.31  0.94  0.28 -0.05 -0.89 -1.09  0.14 -1.83 -0.1  -0.76  0.36  0.24
  -0.04 -0.18  0.94  0.95  1.01  0.45]
 [-0.89  3.17 -0.78 -0.32  0.6   0.34  0.1  -0.08  0.6  -0.1   0.59  0.24
  -0.22  0.55 -0.29  1.04  0.03  0.34  0.55  0.9  -0.39 -0.64 -1.   -0.02
  -0.64 -0.01 -0.11 -0.89  1.11 -0.67]
 [-1.12 -0.78  4.73  0.01  0.2   1.36 -0.19 -0.05 -0.2  -1.22 -0.82 -0.01
   0.08 -0.55  0.6   0.14 -0.51 -1.24  1.05  1.28  0.95 -0.23  2.24 -0.61
   0.64 -0.   -0.52 -1.92 -1.38  0.45]
 [-0.03 -0.32  0.01  3.83  0.02 -1.1  -0.22  0.07 -0.59 -0.2  -0.77  0.1
   0.26 -0.25  0.34  1.38 -0.62 -0.29  0.33 -1.24  0.42  1.08  1.14  0.22
  -1.03 -0.23  0.2   1.36 -0.23 -0.91]
 [ 0.21  0.6   0.2   0.02  1.15 -0.1   0.05  0.05 -0.08 -0.6  -0.09  0.22
  -0.07  0.15 -0.06 -0.07 -0.46 -0.18  0.29  0.12  0.21  0.1  -0.21 -0.26
  -0.05  0.08 -0.26 -0.99 -0.23  0.04]
 [-0.88  0.34  1.36 -1.1  -0.1   5.78  0.2  -0.04 -0.64 -0.31  1.06  0.32
  -0.16 -0.71  0.96  0.88 -1.24 -0.18 -1.17 -1.07 -0.82 -0.51  1.69 -1.13
   0.82  0.49 -0.43 -0.5  -0.04 -1.21]
 [ 0.15  0.1  -0.19 -0.22  0.05  0.2   0.2   0.02  0.03 -0.    0.02  0.05
  -0.03  0.14  0.01 -0.03 -0.06 -0.04 -0.12 -0.17 -0.04 -0.14 -0.01 -0.08
   0.07  0.06 -0.01 -0.03  0.14  0.09]
 [ 0.04 -0.08 -0.05  0.07  0.05 -0.04  0.02  0.17  0.04  0.04 -0.   -0.05
  -0.   -0.01 -0.01 -0.14  0.04 -0.08 -0.1  -0.04  0.03  0.01  0.06 -0.06
   0.01  0.1   0.01  0.12 -0.03  0.05]
 [-1.01  0.6  -0.2  -0.59 -0.08 -0.64  0.03  0.04  2.68  0.36 -0.12 -0.44
  -0.24 -0.57 -0.41 -0.69 -0.33  0.46 -0.38  0.72  0.15 -0.38 -1.56  0.09
   0.53 -0.17  0.52  0.31 -0.77  0.71]
 [ 0.88 -0.1  -1.22 -0.2  -0.6  -0.31 -0.    0.04  0.36  3.09  1.42 -0.15
  -0.14  0.29  0.39 -0.54  0.92  0.22 -0.41  0.32 -0.61 -0.3  -0.14  0.72
  -0.13 -0.57  0.01  2.19  1.69 -0.38]
 [ 0.98  0.59 -0.82 -0.77 -0.09  1.06  0.02 -0.   -0.12  1.42  2.2   0.27
  -0.42 -0.12  0.09 -0.41  0.15 -0.31 -0.3   0.09 -0.71 -0.43  0.33  0.3
  -0.07 -0.14  0.23  1.4   1.33 -0.  ]
 [ 0.26  0.24 -0.01  0.1   0.22  0.32  0.05 -0.05 -0.44 -0.15  0.27  0.55
   0.01  0.29  0.1   0.12  0.01 -0.21 -0.09 -0.13  0.1   0.12  0.46 -0.16
  -0.03 -0.11 -0.11 -0.11  0.49  0.08]
 [-0.31 -0.22  0.08  0.26 -0.07 -0.16 -0.03 -0.   -0.24 -0.14 -0.42  0.01
   0.55  0.02  0.18  0.18  0.25  0.09 -0.16  0.1   0.06  0.1  -0.01 -0.03
  -0.07 -0.11 -0.2  -0.14  0.07 -0.02]
 [ 0.94  0.55 -0.55 -0.25  0.15 -0.71  0.14 -0.01 -0.57  0.29 -0.12  0.29
   0.02  1.97  0.1   0.23  0.81 -0.21 -0.1  -0.1   0.5  -0.25  0.02 -0.33
   0.23 -0.04  0.27 -0.7   1.22 -0.26]
 [ 0.28 -0.29  0.6   0.34 -0.06  0.96  0.01 -0.01 -0.41  0.39  0.09  0.1
   0.18  0.1   0.96  0.42 -0.15 -0.69 -0.19 -0.62 -0.12 -0.22  0.61 -0.03
   0.13 -0.02  0.04  0.69 -0.02 -0.55]
 [-0.05  1.04  0.14  1.38 -0.07  0.88 -0.03 -0.14 -0.69 -0.54 -0.41  0.12
   0.18  0.23  0.42  2.59 -1.24 -0.2  -0.09 -0.53 -0.52 -0.14  0.6   0.03
  -0.73 -0.14  0.08 -0.74  0.88 -1.  ]
 [-0.89  0.03 -0.51 -0.62 -0.46 -1.24 -0.06  0.04 -0.33  0.92  0.15  0.01
   0.25  0.81 -0.15 -1.24  2.97  0.23  0.28  1.14  0.26  0.27 -0.62  0.1
   0.06 -0.01 -0.42  0.95  0.27 -0.37]
 [-1.09  0.34 -1.24 -0.29 -0.18 -0.18 -0.04 -0.08  0.46  0.22 -0.31 -0.21
   0.09 -0.21 -0.69 -0.2   0.23  2.77  0.5   0.19  0.15  0.6  -1.25  0.01
  -0.1  -0.12 -0.5  -1.12 -0.11 -0.67]
 [ 0.14  0.55  1.05  0.33  0.29 -1.17 -0.12 -0.1  -0.38 -0.41 -0.3  -0.09
  -0.16 -0.1  -0.19 -0.09  0.28  0.5   3.37 -0.11  0.71 -0.4  -0.32  0.4
  -0.67 -0.01 -0.24 -0.48 -1.39 -0.66]
 [-1.83  0.9   1.28 -1.24  0.12 -1.07 -0.17 -0.04  0.72  0.32  0.09 -0.13
   0.1  -0.1  -0.62 -0.53  1.14  0.19 -0.11  4.13 -0.05  0.13  0.23  0.44
  -0.2  -0.31 -0.97 -1.92  1.56  1.45]
 [-0.1  -0.39  0.95  0.42  0.21 -0.82 -0.04  0.03  0.15 -0.61 -0.71  0.1
   0.06  0.5  -0.12 -0.52  0.26  0.15  0.71 -0.05  1.37  0.26  0.45 -0.39
   0.42  0.01  0.18 -0.77 -0.7   0.33]
 [-0.76 -0.64 -0.23  1.08  0.1  -0.51 -0.14  0.01 -0.38 -0.3  -0.43  0.12
   0.1  -0.25 -0.22 -0.14  0.27  0.6  -0.4   0.13  0.26  1.49  0.41  0.11
  -0.12 -0.03 -0.33  0.03 -0.18 -0.08]
 [ 0.36 -1.    2.24  1.14 -0.21  1.69 -0.01  0.06 -1.56 -0.14  0.33  0.46
  -0.01  0.02  0.61  0.6  -0.62 -1.25 -0.32  0.23  0.45  0.41  4.4  -0.03
  -0.13  0.11 -0.31 -0.29  1.69  0.95]
 [ 0.24 -0.02 -0.61  0.22 -0.26 -1.13 -0.08 -0.06  0.09  0.72  0.3  -0.16
  -0.03 -0.33 -0.03  0.03  0.1   0.01  0.4   0.44 -0.39  0.11 -0.03  1.36
  -0.56 -0.23  0.14  1.07  0.65  0.51]
 [-0.04 -0.64  0.64 -1.03 -0.05  0.82  0.07  0.01  0.53 -0.13 -0.07 -0.03
  -0.07  0.23  0.13 -0.73  0.06 -0.1  -0.67 -0.2   0.42 -0.12 -0.13 -0.56
   1.27  0.05  0.34 -0.34 -0.73  0.23]
 [-0.18 -0.01 -0.   -0.23  0.08  0.49  0.06  0.1  -0.17 -0.57 -0.14 -0.11
  -0.11 -0.04 -0.02 -0.14 -0.01 -0.12 -0.01 -0.31  0.01 -0.03  0.11 -0.23
   0.05  0.69  0.06 -0.16 -0.48 -0.09]
 [ 0.94 -0.11 -0.52  0.2  -0.26 -0.43 -0.01  0.01  0.52  0.01  0.23 -0.11
  -0.2   0.27  0.04  0.08 -0.42 -0.5  -0.24 -0.97  0.18 -0.33 -0.31  0.14
   0.34  0.06  1.29  0.96 -0.28  0.17]
 [ 0.95 -0.89 -1.92  1.36 -0.99 -0.5  -0.03  0.12  0.31  2.19  1.4  -0.11
  -0.14 -0.7   0.69 -0.74  0.95 -1.12 -0.48 -1.92 -0.77  0.03 -0.29  1.07
  -0.34 -0.16  0.96  5.81 -0.35 -0.59]
 [ 1.01  1.11 -1.38 -0.23 -0.23 -0.04  0.14 -0.03 -0.77  1.69  1.33  0.49
   0.07  1.22 -0.02  0.88  0.27 -0.11 -1.39  1.56 -0.7  -0.18  1.69  0.65
  -0.73 -0.48 -0.28 -0.35  4.81  1.05]
 [ 0.45 -0.67  0.45 -0.91  0.04 -1.21  0.09  0.05  0.71 -0.38 -0.    0.08
  -0.02 -0.26 -0.55 -1.   -0.37 -0.67 -0.66  1.45  0.33 -0.08  0.95  0.51
   0.23 -0.09  0.17 -0.59  1.05  3.02]]

Link between paper and code

\begin{equation*} \ell(x) = \sum_k x_k + \frac{1}{2} \sum_k (x_k^+)^2 \end{equation*}

We need to solve the following system:

\begin{equation*} \begin{cases} \lambda \left[ 1 + \mathbb{E} \left( \left(X_k - m_k^* \right)^+ \right) \right] = 1 \text{ for all $k$} \\ \sum_k \mathbb{E} \left( X_k - m_k^* \right) + \sum_k \frac{1}{2} \mathbb{E} \left( \left[ \left( X_k - m_k^* \right)^+ \right]^2 \right) = c \end{cases} \end{equation*}

Definitions from the paper:

\begin{equation*} \begin{cases} \lambda \left[ 1 + f_k \left( m_k^* \right) \right] = 1 \text{ for all $k$} \\ \sum_k \left( e_k \left( m_k^* \right) + \frac{1}{2} g_k \left( m_k^* \right) \right) = c \end{cases} \end{equation*}

In [5]:
def e(index, x):
    mu = Mu[index]
    
    return mu - x

In [6]:
def f(k, x):
    global X
    Xk = X[:, k]
    Xkmx = Xk - x    
    
    Xkmxp = numpy.maximum(Xkmx, 0.)
    
    return numpy.mean(Xkmxp)

In [7]:
def g(k, x):
    global X
    Xk = X[:, k]
    Xkmx = Xk - x    
    
    Xkmxp = numpy.maximum(Xkmx, 0.)
    
    Xkmxp2 = numpy.square(Xkmxp)
    
    return numpy.mean(Xkmxp2)

In [8]:
class WrapperFunc(object):
    def __init__(self, func, indexes):
        self._i = indexes
        self._f = func
        
    def __call__(self, *x):
        tmp_args = self._i + list(x)
        return self._f(*tmp_args)

Put the variables and functions to the remote engines


In [9]:
def init_funcs():
    funcs = dict()
    
    for k in range(N):
        ek = WrapperFunc(e, [k])
        fk = WrapperFunc(f, [k])
        gk = WrapperFunc(g, [k])
    
        funcs[k] = {"e": ek, "f": fk, "g": gk}
        
    return funcs

In [10]:
def prepare_parameterized_funcs(dict_funcs):
    res = dict()
        
    for k, v in dict_funcs.iteritems():
        for kk, vv in v.iteritems():
            if isinstance(vv, dict):
                for j, f in vv.iteritems():
                    key = "%s_%s_%s" % (kk[:-1], j, k)
                    res[key] = (f, j, k)
            else:
                key = "%s_%s" % (kk, k)
                res[key] = (vv, k)
        
    return res

In [11]:
def run_par_func(param):
    f = param[0]
    vm = []
    for i_m in param[1:]:
        vm.append(M[i_m])
    
    return f(*vm)

In [12]:
funcs = init_funcs()
flatten_funcs = prepare_parameterized_funcs(funcs)

In [13]:
from timeit import default_timer as timer

In [14]:
from scipy.optimize import fsolve

def optimize(c, **kwargs):
    global N
    
    __results = {key: dict() for key in "efg"}
        
    def fill_results(lbl, res):
        m_lbl = lbl.split("_")
        
        fname = m_lbl[0]
        k = int(m_lbl[-1])
        
        if len(m_lbl) == 3:
            j = int(m_lbl[1])
            if k not in __results[fname]:
                __results[fname][k] = dict()
                
            __results[fname][k][j] = res
        else:
            __results[fname][k] = res
    
    def objective(v_x):
        _lambda = v_x[-1]
        
        # Local version
        global M
        M = numpy.clip(v_x, LOW_BOUND, UPP_BOUND)
        f_res = map(run_par_func, flatten_funcs.values())
        
        for lbl, fx in zip(flatten_funcs.keys(), f_res):
            fill_results(lbl, fx)
            
        last = 0.
        res = numpy.zeros(N + 1)
        
        for k in range(N):
            res_k = 1 + __results["f"][k]
            res[k] = (_lambda * res_k - 1.)**2
            
            last += __results["e"][k] + .5 * __results["g"][k]
                    
        res[-1] = (last - c)**2
        
        #print M
        #print res
        #print
        
        return res
    
    guess = numpy.zeros(N + 1)
    if 'guess' in kwargs and kwargs['guess'] is not None:
        guess = kwargs.pop('guess')
        
    return fsolve(objective, guess, **kwargs)

In [15]:
c = 1.

guess = numpy.zeros(N+1)

In [16]:
tic = timer()

continue_ = True
__i = 1
l_N = 0
while continue_:
    print "iteration nb %i" % __i
    optim_result = optimize(c, guess=guess, maxfev=1000, full_output=True)
    continue_ = optim_result[2] != 1
    if continue_:
        guess = optim_result[0]
        __i += 1
        l_N += optim_result[1]["nfev"]
        print
    
toc_m_tic = timer() - tic

print optim_result[-1]


iteration nb 1
The solution converged.

In [17]:
m_star = optim_result[0][:-1]
lb_star = optim_result[0][-1]
cto = toc_m_tic

print "Time of optimization: %.2f sec" % (cto)
print "m star = %s" % m_star
print "lambda star = %s" % lb_star


Time of optimization: 439.15 sec
m star = [ 0.83712089  0.58221694  1.00522252  0.76898341 -0.05819226  1.26867332
 -0.4136241  -0.42363226  0.43745752  0.56097767  0.2920662  -0.279692
 -0.27845876  0.22110763 -0.12162183  0.41485895  0.5294553   0.4651966
  0.63949773  0.847902    0.02051029  0.06061764  0.92403066  0.01916246
 -0.01390447 -0.22607799 -0.00587954  1.28038082  1.02505175  0.54083552]
lambda star = 0.686585125907

In [18]:
l_N += optim_result[1]["nfev"]

print "l(%i) = %i" % (N, l_N)
print "function evaluated at the output = %s" % optim_result[1]["fvec"]


l(30) = 216
function evaluated at the output = [  8.95945710e-17   9.03304188e-17   8.87053542e-17   8.78487177e-17
   8.80847185e-17   9.02107059e-17   9.06011911e-17   9.05966470e-17
   8.87893479e-17   8.87207760e-17   8.86168497e-17   9.00880080e-17
   9.01057733e-17   8.89788780e-17   8.95671662e-17   8.90526286e-17
   8.88647405e-17   8.90454814e-17   8.87556234e-17   8.83933527e-17
   8.90843346e-17   8.91142478e-17   8.90146182e-17   8.91609237e-17
   8.92162967e-17   9.00691403e-17   8.94165425e-17   8.87382135e-17
   8.88176817e-17   8.87358311e-17   2.97389459e-15]

Just need to copy the results in Excel:


In [19]:
print m_star
print
print "%f \n %i \n %.2f \n %.2f" % (lb_star, l_N, ctp, cto)


[ 0.83712089  0.58221694  1.00522252  0.76898341 -0.05819226  1.26867332
 -0.4136241  -0.42363226  0.43745752  0.56097767  0.2920662  -0.279692
 -0.27845876  0.22110763 -0.12162183  0.41485895  0.5294553   0.4651966
  0.63949773  0.847902    0.02051029  0.06061764  0.92403066  0.01916246
 -0.01390447 -0.22607799 -0.00587954  1.28038082  1.02505175  0.54083552]

0.686585 
 216 
 1.87 
 439.15